x86, spinlock: Get rid of .text.lock out-of-line section.
author: Keir Fraser <keir.fraser@citrix.com>
Mon, 20 Oct 2008 16:17:55 +0000 (17:17 +0100)
committer: Keir Fraser <keir.fraser@citrix.com>
Mon, 20 Oct 2008 16:17:55 +0000 (17:17 +0100)
We don't care about code bloat now that spinlock operations are not
inlined into callers. This will make backtraces easier to read.

Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
xen/arch/x86/x86_32/xen.lds.S
xen/arch/x86/x86_64/xen.lds.S
xen/include/asm-x86/rwlock.h
xen/include/asm-x86/spinlock.h

index 2c3d21fcede130cc659e7c83143bcfea2b25db18..99fffca441dfbdcec823a77c49100d73f607369a 100644 (file)
@@ -26,7 +26,6 @@ SECTIONS
        *(.fixup)
        *(.gnu.warning)
        } :text =0x9090
-  .text.lock : { *(.text.lock) } :text /* out-of-line lock text */
 
   _etext = .;                  /* End of text section */
 
index 55559f4678c35d11e58a566fd432df6eb498126f..4f840107cea715814b6352eda99aa30aa12624cb 100644 (file)
@@ -24,7 +24,6 @@ SECTIONS
        *(.fixup)
        *(.gnu.warning)
        } :text = 0x9090
-  .text.lock : { *(.text.lock) } :text /* out-of-line lock text */
 
   _etext = .;                  /* End of text section */
 
index e8c8846cb5fbd1c39e3a42f6d9c39b36579f9118..e4474d4a259b2791b00586a311ccf0dc6e1673e5 100644 (file)
 
 #define __build_read_lock_ptr(rw, helper)   \
        asm volatile(LOCK "subl $1,(%0)\n\t" \
-                    "js 2f\n" \
+                    "jns 1f\n\t" \
+                    "call " helper "\n\t" \
                     "1:\n" \
-                    ".section .text.lock,\"ax\"\n" \
-                    "2:\tcall " helper "\n\t" \
-                    "jmp 1b\n" \
-                    ".previous" \
                     ::"a" (rw) : "memory")
 
 #define __build_read_lock_const(rw, helper)   \
        asm volatile(LOCK "subl $1,%0\n\t" \
-                    "js 2f\n" \
-                    "1:\n" \
-                    ".section .text.lock,\"ax\"\n" \
-                    "2:\tpush %%"__OP"ax\n\t" \
+                    "jns 1f\n\t" \
+                    "push %%"__OP"ax\n\t" \
                     "lea %0,%%"__OP"ax\n\t" \
                     "call " helper "\n\t" \
                     "pop %%"__OP"ax\n\t" \
-                    "jmp 1b\n" \
-                    ".previous" \
+                    "1:\n" \
                     :"=m" (*(volatile int *)rw) : : "memory")
 
 #define __build_read_lock(rw, helper)  do { \
 
 #define __build_write_lock_ptr(rw, helper) \
        asm volatile(LOCK "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
-                    "jnz 2f\n" \
+                    "jz 1f\n\t" \
+                    "call " helper "\n\t" \
                     "1:\n" \
-                    ".section .text.lock,\"ax\"\n" \
-                    "2:\tcall " helper "\n\t" \
-                    "jmp 1b\n" \
-                    ".previous" \
                     ::"a" (rw) : "memory")
 
 #define __build_write_lock_const(rw, helper) \
        asm volatile(LOCK "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
-                    "jnz 2f\n" \
-                    "1:\n" \
-                    ".section .text.lock,\"ax\"\n" \
-                    "2:\tpush %%"__OP"ax\n\t" \
+                    "jz 1f\n\t" \
+                    "push %%"__OP"ax\n\t" \
                     "lea %0,%%"__OP"ax\n\t" \
                     "call " helper "\n\t" \
                     "pop %%"__OP"ax\n\t" \
-                    "jmp 1b\n" \
-                    ".previous" \
+                    "1:\n" \
                     :"=m" (*(volatile int *)rw) : : "memory")
 
 #define __build_write_lock(rw, helper) do { \
index 7dc1da0bd8f110f36ddd3473073fea11a6c94c66..f5503a282052cbc19001ce91be04211469a4a3d4 100644 (file)
@@ -18,14 +18,13 @@ typedef struct {
 static inline void _raw_spin_lock(raw_spinlock_t *lock)
 {
     asm volatile (
-        "1:  lock; decb %0         \n"
-        "    js 2f                 \n"
-        ".section .text.lock,\"ax\"\n"
+        "1:  lock; decw %0         \n"
+        "    jns 3f                \n"
         "2:  rep; nop              \n"
-        "    cmpb $0,%0            \n"
+        "    cmpw $0,%0            \n"
         "    jle 2b                \n"
         "    jmp 1b                \n"
-        ".previous"
+        "3:"
         : "=m" (lock->lock) : : "memory" );
 }
 
@@ -33,16 +32,16 @@ static inline void _raw_spin_unlock(raw_spinlock_t *lock)
 {
     ASSERT(_raw_spin_is_locked(lock));
     asm volatile (
-        "movb $1,%0" 
+        "movw $1,%0" 
         : "=m" (lock->lock) : : "memory" );
 }
 
 static inline int _raw_spin_trylock(raw_spinlock_t *lock)
 {
-    char oldval;
+    s16 oldval;
     asm volatile (
-        "xchgb %b0,%1"
-        :"=q" (oldval), "=m" (lock->lock)
+        "xchgw %w0,%1"
+        :"=r" (oldval), "=m" (lock->lock)
         :"0" (0) : "memory" );
     return (oldval > 0);
 }